.quad 0x00affa000000ffff /* 0x082b ring 3 code, 64-bit mode */
.quad 0x00cff2000000ffff /* 0x0833 ring 3 data */
.quad 0x0000000000000000 /* unused */
- .fill 2*NR_CPUS,8,0 /* space for TSS and LDT per CPU */
+ .fill 4*NR_CPUS,8,0 /* space for TSS and LDT per CPU (16-byte descriptors in long mode) */
.word 0
gdt_descr:
.word 0
idt_descr:
- .word 256*8-1
+ .word 256*16-1
SYMBOL_NAME(idt):
.quad SYMBOL_NAME(idt_table)
return 0;
}
-#if defined(__i386__)
+#if defined(__i386__) /* XXX */
void new_thread(struct exec_domain *d,
unsigned long start_pc,
void arch_init_memory(void)
{
-#ifdef __i386__
+#ifdef __i386__ /* XXX */
unsigned long i;
/*
int phys_proc_id[NR_CPUS];
int logical_proc_id[NR_CPUS];
-#if defined(__i386__)
-
-/* Standard macro to see if a specific flag is changeable */
-static inline int flag_is_changeable_p(u32 flag)
+/* Standard routine to see if a specific flag is changeable. */
+static inline int flag_is_changeable_p(unsigned long flag)
{
- u32 f1, f2;
-
- asm("pushfl\n\t"
- "pushfl\n\t"
- "popl %0\n\t"
- "movl %0,%1\n\t"
- "xorl %2,%0\n\t"
- "pushl %0\n\t"
- "popfl\n\t"
- "pushfl\n\t"
- "popl %0\n\t"
- "popfl\n\t"
+ unsigned long f1, f2;
+
+ asm("pushf\n\t"
+ "pushf\n\t"
+ "pop %0\n\t"
+ "mov %0,%1\n\t"
+ "xor %2,%0\n\t"
+ "push %0\n\t"
+ "popf\n\t"
+ "pushf\n\t"
+ "pop %0\n\t"
+ "popf\n\t"
: "=&r" (f1), "=&r" (f2)
: "ir" (flag));
return flag_is_changeable_p(X86_EFLAGS_ID);
}
-#elif defined(__x86_64__)
-
-#define have_cpuid_p() (1)
-
-#endif
-
void __init get_cpu_vendor(struct cpuinfo_x86 *c)
{
char *v = c->x86_vendor_id;
unsigned long cpu_initialized;
void __init cpu_init(void)
{
-#if defined(__i386__) /* XXX */
int nr = smp_processor_id();
- struct tss_struct * t = &init_tss[nr];
+ struct tss_struct *t = &init_tss[nr];
if ( test_and_set_bit(nr, &cpu_initialized) )
panic("CPU#%d already initialized!!!\n", nr);
printk("Initializing CPU#%d\n", nr);
- t->bitmap = IOBMP_INVALID_OFFSET;
- memset(t->io_bitmap, ~0, sizeof(t->io_bitmap));
-
/* Set up GDT and IDT. */
SET_GDT_ENTRIES(current, DEFAULT_GDT_ENTRIES);
SET_GDT_ADDRESS(current, DEFAULT_GDT_ADDRESS);
- __asm__ __volatile__("lgdt %0": "=m" (*current->mm.gdt));
- __asm__ __volatile__("lidt %0": "=m" (idt_descr));
+ __asm__ __volatile__ ( "lgdt %0" : "=m" (*current->mm.gdt) );
+ __asm__ __volatile__ ( "lidt %0" : "=m" (idt_descr) );
/* No nested task. */
- __asm__("pushfl ; andl $0xffffbfff,(%esp) ; popfl");
+ __asm__ __volatile__ ( "pushf ; andw $0xbfff,(%"__OP"sp) ; popf" );
/* Ensure FPU gets initialised for each domain. */
stts();
/* Set up and load the per-CPU TSS and LDT. */
+ t->bitmap = IOBMP_INVALID_OFFSET;
+ memset(t->io_bitmap, ~0, sizeof(t->io_bitmap));
+#if defined(__i386__)
t->ss0 = __HYPERVISOR_DS;
t->esp0 = get_stack_top();
+#elif defined(__x86_64__)
+ t->rsp0 = get_stack_top();
+#endif
set_tss_desc(nr,t);
load_TR(nr);
- __asm__ __volatile__("lldt %%ax"::"a" (0));
+ __asm__ __volatile__ ( "lldt %%ax" : : "a" (0) );
/* Clear all 6 debug registers. */
-#define CD(register) __asm__("movl %0,%%db" #register ::"r"(0) );
+#define CD(register) __asm__ ( "mov %0,%%db" #register : : "r" (0UL) );
CD(0); CD(1); CD(2); CD(3); /* no db4 and db5 */; CD(6); CD(7);
#undef CD
write_ptbase(¤t->mm);
init_idle_task();
-#endif
}
static void __init do_initcalls(void)
* now safe to make ourselves a private copy.
*/
idt_tables[cpu] = xmalloc_array(idt_entry_t, IDT_ENTRIES);
- memcpy(idt_tables[cpu], idt_table, IDT_ENTRIES*8);
- *(unsigned short *)(&idt_load[0]) = (IDT_ENTRIES*8)-1;
+ memcpy(idt_tables[cpu], idt_table, IDT_ENTRIES*sizeof(idt_entry_t));
+ *(unsigned short *)(&idt_load[0]) = (IDT_ENTRIES*sizeof(idt_entry_t))-1;
*(unsigned long *)(&idt_load[2]) = (unsigned long)idt_tables[cpu];
__asm__ __volatile__ ( "lidt %0" : "=m" (idt_load) );
{
_set_tssldt_desc(
gdt_table + __TSS(n),
- (int)addr,
+ (unsigned long)addr,
offsetof(struct tss_struct, __cacheline_filler) - 1,
- 0x89);
+ 9);
}
void __init trap_init(void)
set_intr_gate(TRAP_bounds,&bounds);
set_intr_gate(TRAP_invalid_op,&invalid_op);
set_intr_gate(TRAP_no_device,&device_not_available);
- set_task_gate(TRAP_double_fault,__DOUBLEFAULT_TSS_ENTRY<<3);
set_intr_gate(TRAP_copro_seg,&coprocessor_segment_overrun);
set_intr_gate(TRAP_invalid_tss,&invalid_TSS);
set_intr_gate(TRAP_no_segment,&segment_not_present);
set_intr_gate(TRAP_deferred_nmi,&nmi);
#if defined(__i386__)
+ set_task_gate(TRAP_double_fault,__DOUBLEFAULT_TSS_ENTRY<<3);
_set_gate(idt_table+HYPERCALL_VECTOR, 14, 1, &hypercall);
#elif defined(__x86_64__)
_set_gate(idt_table+HYPERCALL_VECTOR, 14, 3, &hypercall);
tss->eflags = 2;
tss->bitmap = IOBMP_INVALID_OFFSET;
_set_tssldt_desc(gdt_table+__DOUBLEFAULT_TSS_ENTRY,
- (int)tss, 235, 0x89);
+ (unsigned long)tss, 235, 9);
}
long set_fast_trap(struct exec_domain *p, int idx)
popq %rsi
popq %rdi
addq $8,%rsp
- iret
+ iretq
error_code:
SAVE_ALL
#define LDT_ENTRY_SIZE 8
-#define __DOUBLEFAULT_TSS_ENTRY FIRST_RESERVED_GDT_ENTRY
-
-#define __FIRST_TSS_ENTRY (FIRST_RESERVED_GDT_ENTRY + 8)
-#define __FIRST_LDT_ENTRY (__FIRST_TSS_ENTRY + 1)
-
-#define __TSS(n) (((n)<<1) + __FIRST_TSS_ENTRY)
-#define __LDT(n) (((n)<<1) + __FIRST_LDT_ENTRY)
-
#define load_TR(n) __asm__ __volatile__ ("ltr %%ax" : : "a" (__TSS(n)<<3) )
/*
#if defined(__x86_64__)
+#define __FIRST_TSS_ENTRY (FIRST_RESERVED_GDT_ENTRY + 8)
+#define __FIRST_LDT_ENTRY (__FIRST_TSS_ENTRY + 2)
+
+#define __TSS(n) (((n)<<2) + __FIRST_TSS_ENTRY)
+#define __LDT(n) (((n)<<2) + __FIRST_LDT_ENTRY)
+
#define VALID_CODESEL(_s) ((_s) == FLAT_RING3_CS64 || VALID_SEL(_s))
typedef struct {
((unsigned long)(addr) >> 32); \
} while (0)
-#define _set_tssldt_desc(n,addr,limit,type) ((void)0)
+#define _set_tssldt_desc(desc,addr,limit,type) \
+do { \
+ (desc)[0].a = \
+ ((u32)(addr) << 16) | ((u32)(limit) & 0xFFFF); \
+ (desc)[0].b = \
+ ((u32)(addr) & 0xFF000000U) | \
+ ((u32)(type) << 8) | 0x8000U | \
+ (((u32)(addr) & 0x00FF0000U) >> 16); \
+ (desc)[1].a = (u32)(((unsigned long)(addr)) >> 32); \
+ (desc)[1].b = 0; \
+} while (0)
#elif defined(__i386__)
+#define __DOUBLEFAULT_TSS_ENTRY FIRST_RESERVED_GDT_ENTRY
+
+#define __FIRST_TSS_ENTRY (FIRST_RESERVED_GDT_ENTRY + 8)
+#define __FIRST_LDT_ENTRY (__FIRST_TSS_ENTRY + 1)
+
+#define __TSS(n) (((n)<<1) + __FIRST_TSS_ENTRY)
+#define __LDT(n) (((n)<<1) + __FIRST_LDT_ENTRY)
+
#define VALID_CODESEL(_s) ((_s) == FLAT_RING1_CS || VALID_SEL(_s))
typedef struct desc_struct idt_entry_t;
"movb $0,6(%2)\n\t" \
"movb %%ah,7(%2)\n\t" \
"rorl $16,%%eax" \
- : "=m"(*(n)) : "a" (addr), "r"(n), "ir"(limit), "i"(type))
+ : "=m"(*(n)) : "a" (addr), "r"(n), "ir"(limit), "i"(type|0x80))
#endif
u8 io_bitmap[IOBMP_BYTES+1];
/* Pads the TSS to be cacheline-aligned (total size is 0x2080). */
u8 __cacheline_filler[23];
-};
+} __cacheline_aligned PACKED;
struct trap_bounce {
unsigned long error_code;
* NB. The reserved range is inclusive (that is, both FIRST_RESERVED_GDT_ENTRY
* and LAST_RESERVED_GDT_ENTRY are reserved).
*/
-#define NR_RESERVED_GDT_ENTRIES 40
+#define NR_RESERVED_GDT_ENTRIES 72
#define FIRST_RESERVED_GDT_ENTRY 256
#define LAST_RESERVED_GDT_ENTRY \
(FIRST_RESERVED_GDT_ENTRY + NR_RESERVED_GDT_ENTRIES - 1)